.or_insert(Vec::new())
.push(("OUT_DIR".to_string(), out_dir));
- for (dst, link_dst, _linkable) in try!(cx.target_filenames(unit)) {
- for (filename, _linkable) in cx.target_filenames(unit)? {
- let dst = cx.out_dir(unit).join(filename);
++ for (dst, link_dst, _linkable) in cx.target_filenames(unit)? {
+ let bindst = match link_dst {
+ Some(link_dst) => link_dst,
+ None => dst.clone(),
+ };
+
if unit.profile.test {
cx.compilation.tests.push((unit.pkg.clone(),
unit.target.name().to_string(),
continue
}
- let v = try!(cx.target_filenames(unit));
+ let v = cx.target_filenames(unit)?;
- let v = v.into_iter().map(|(f, _)| {
- (unit.target.clone(), cx.out_dir(unit).join(f))
+ let v = v.into_iter().map(|(f, _, _)| {
+ (unit.target.clone(), f)
}).collect::<Vec<_>>();
cx.compilation.libraries.insert(pkgid.clone(), v);
}
// we've got everything constructed.
let p = profile::start(format!("preparing: {}/{}", unit.pkg,
unit.target.name()));
- try!(fingerprint::prepare_init(cx, unit));
- try!(cx.links.validate(unit));
+ fingerprint::prepare_init(cx, unit)?;
+ cx.links.validate(unit)?;
let (dirty, fresh, freshness) = if unit.profile.run_custom_build {
- try!(custom_build::prepare(cx, unit))
+ custom_build::prepare(cx, unit)?
} else {
- let (freshness, dirty, fresh) = try!(fingerprint::prepare_target(cx,
- unit));
+ let (freshness, dirty, fresh) = fingerprint::prepare_target(cx,
+ unit)?;
let work = if unit.profile.doc {
- try!(rustdoc(cx, unit))
+ rustdoc(cx, unit)?
} else {
- try!(rustc(cx, unit))
+ rustc(cx, unit)?
};
- let link_work1 = try!(link_targets(cx, unit));
- let link_work2 = try!(link_targets(cx, unit));
- let dirty = work.then(dirty);
++ let link_work1 = link_targets(cx, unit)?;
++ let link_work2 = link_targets(cx, unit)?;
+ // Need to link targets on both the dirty and fresh
+ let dirty = work.then(link_work1).then(dirty);
+ let fresh = link_work2.then(fresh);
(dirty, fresh, freshness)
};
- try!(jobs.enqueue(cx, unit, Job::new(dirty, fresh), freshness));
+ jobs.enqueue(cx, unit, Job::new(dirty, fresh), freshness)?;
drop(p);
// Be sure to compile all dependencies of this target as well.
// FIXME(rust-lang/rust#18913): we probably shouldn't have to do
// this manually
- for &(ref filename, _linkable) in filenames.iter() {
- let dst = root.join(filename);
+ for &(ref dst, ref _link_dst, _linkable) in filenames.iter() {
if fs::metadata(&dst).is_ok() {
- try!(fs::remove_file(&dst).chain_error(|| {
+ fs::remove_file(&dst).chain_error(|| {
human(format!("Could not remove file: {}.", dst.display()))
- }));
+ })?;
}
}
rustc.exec()
}.chain_error(|| {
human(format!("Could not compile `{}`.", name))
- }));
+ })?;
if do_rename && real_name != crate_name {
- let dst = root.join(&filenames[0].0);
+ let dst = &filenames[0].0;
let src = dst.with_file_name(dst.file_name().unwrap()
.to_str().unwrap()
.replace(&real_name, &crate_name));
}
if !has_custom_args || fs::metadata(&rustc_dep_info_loc).is_ok() {
- try!(fs::rename(&rustc_dep_info_loc, &dep_info_loc).chain_error(|| {
+ info!("Renaming dep_info {:?} to {:?}", rustc_dep_info_loc, dep_info_loc);
+ fs::rename(&rustc_dep_info_loc, &dep_info_loc).chain_error(|| {
internal(format!("could not rename dep info: {:?}",
rustc_dep_info_loc))
- }));
- try!(fingerprint::append_current_dir(&dep_info_loc, &cwd));
+ })?;
+ fingerprint::append_current_dir(&dep_info_loc, &cwd)?;
}
- // If we're a "root crate", e.g. the target of this compilation, then we
- // hard link our outputs out of the `deps` directory into the directory
- // above. This means that `cargo build` will produce binaries in
- // `target/debug` which one probably expects.
- if move_outputs_up {
- for &(ref filename, _linkable) in filenames.iter() {
- let src = root.join(filename);
- // This may have been a `cargo rustc` command which changes the
- // output, so the source may not actually exist.
- if !src.exists() {
- continue
- }
-
- // We currently only lift files up from the `deps` directory. If
- // it was compiled into something like `example/` or `doc/` then
- // we don't want to link it up.
- let src_dir = src.parent().unwrap();
- if !src_dir.ends_with("deps") {
- continue
- }
- let dst = src_dir.parent().unwrap()
- .join(src.file_name().unwrap());
- if dst.exists() {
- fs::remove_file(&dst).chain_error(|| {
- human(format!("failed to remove: {}", dst.display()))
- })?;
- }
- fs::hard_link(&src, &dst)
- .or_else(|_| fs::copy(&src, &dst).map(|_| ()))
- .chain_error(|| {
- human(format!("failed to link or copy `{}` to `{}`",
- src.display(), dst.display()))
- })?;
- }
- }
-
Ok(())
}));
}
}
- let filenames = try!(cx.target_filenames(unit));
+/// Link the compiled target (often of form foo-{metadata_hash}) to the
+/// final target. This must happen during both "Fresh" and "Compile"
+fn link_targets(cx: &mut Context, unit: &Unit) -> CargoResult<Work> {
- try!(fs::remove_file(&dst).chain_error(|| {
++ let filenames = cx.target_filenames(unit)?;
+ Ok(Work::new(move |_| {
+ // If we're a "root crate", e.g. the target of this compilation, then we
+ // hard link our outputs out of the `deps` directory into the directory
+ // above. This means that `cargo build` will produce binaries in
+ // `target/debug` which one probably expects.
+ for (src, link_dst, _linkable) in filenames {
+ // This may have been a `cargo rustc` command which changes the
+ // output, so the source may not actually exist.
+ debug!("Thinking about linking {} to {:?}", src.display(), link_dst);
+ if !src.exists() || link_dst.is_none() {
+ continue
+ }
+ let dst = link_dst.unwrap();
+
+ debug!("linking {} to {}", src.display(), dst.display());
+ if dst.exists() {
- }));
++ fs::remove_file(&dst).chain_error(|| {
+ human(format!("failed to remove: {}", dst.display()))
- try!(fs::hard_link(&src, &dst)
++ })?;
+ }
- }));
++ fs::hard_link(&src, &dst)
+ .or_else(|err| {
+ debug!("hard link failed {}. falling back to fs::copy", err);
+ fs::copy(&src, &dst).map(|_| ())
+ })
+ .chain_error(|| {
+ human(format!("failed to link or copy `{}` to `{}`",
+ src.display(), dst.display()))
++ })?;
+ }
+ Ok(())
+ }))
+}
+
/// Look up the recorded build-script outputs for `unit`, if any,
/// returning a fresh handle to the shared `BuildScripts`.
fn load_build_deps(cx: &Context, unit: &Unit) -> Option<Arc<BuildScripts>> {
    cx.build_scripts.get(unit).map(Arc::clone)
}
fn link_to(cmd: &mut ProcessBuilder, cx: &Context, unit: &Unit)
-> CargoResult<()> {
- for (dst, _link_dst, linkable) in try!(cx.target_filenames(unit)) {
- for (filename, linkable) in cx.target_filenames(unit)? {
++ for (dst, _link_dst, linkable) in cx.target_filenames(unit)? {
if !linkable {
continue
}